# Springback UV map

import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
# Higher DPI for crisper inline figures in the notebook.
plt.rcParams['figure.dpi'] = 150

# Project-local predictor that maps process parameters + (u, v) surface
# coordinates to output fields.
from mesh_predictor import ProjectionPredictor
# Design-of-experiments table: one row per experiment, with the process
# parameters and material properties used for each simulation run.
doe = pd.read_csv('../data/doe.csv')
doe.describe()
doe_id Material_ID Blechdicke Niederhalterkraft Ziehspalt Stempel_ID Einlegeposition Ziehtiefe Breite UG OG E Rp0 Rp0.2 Rp100 Rp25 Rp50
count 1000.000000 1000.000000 1000.000000 1000.00000 1000.00000 1000.000000 1000.000000 1000.000000 1000.00000 1000.000000 1000.000000 1000.000000 1000.000000 1000.000000 1000.000000 1000.000000 1000.000000
mean 500.500000 3.866000 1.188370 251.12000 2.17280 2.716000 -0.422000 45.000000 70.65440 1.548120 1.809720 129.102675 209.942033 217.051735 481.885749 401.776216 442.422026
std 288.819436 1.637292 0.224187 147.00325 0.36093 0.451162 3.326708 16.591422 0.72186 0.257163 0.302279 58.332400 56.832894 54.073664 112.525169 61.073415 80.305188
min 1.000000 1.000000 0.990000 10.00000 1.60000 2.000000 -5.000000 30.000000 70.20000 1.140000 1.330000 71.681980 133.182632 147.101263 384.123147 369.820589 383.041834
25% 250.750000 3.000000 1.000000 120.00000 1.60000 2.000000 -3.250000 30.000000 70.20000 1.140000 1.330000 71.681980 138.226960 147.601859 384.123147 369.820589 383.041834
50% 500.500000 4.000000 1.010000 250.00000 2.40000 3.000000 -0.500000 40.000000 70.20000 1.710000 2.000000 118.282190 235.160326 237.951493 451.324586 374.608673 415.842304
75% 750.250000 5.000000 1.480000 380.00000 2.40000 3.000000 2.000000 55.000000 71.80000 1.710000 2.000000 191.372450 248.796491 251.315902 534.002871 385.773439 449.528189
max 1000.000000 6.000000 1.480000 500.00000 2.40000 3.000000 5.000000 70.000000 71.80000 1.710000 2.000000 200.038130 296.556500 306.211480 711.700600 556.006850 629.530400
# Per-point springback samples: one row per (experiment, u, v) surface point
# with the resulting x/y/z position, thickness, plastic strain and thinning.
data = pd.read_csv('../data/springback_uvmap.csv')
# Exclude experiments 1000 and 247 (presumably faulty/incomplete runs —
# TODO confirm why they are dropped). A single boolean `isin` mask replaces
# the original two separate in-place `drop` passes: one scan instead of two,
# and no index-based deletion.
data = data[~data['doe_id'].isin([1000, 247])]
data
doe_id u v x y z thickness epseqpl thinning
0 1 0.000000 0.000000 -132.603620 -117.544130 0.895656 1.000417 0.000823 0.990512
1 1 0.500000 0.019171 -0.000088 -87.060616 0.489975 0.991176 0.010210 0.981363
2 1 1.000000 0.036529 137.002380 -104.238950 0.742256 1.000619 0.001208 0.990712
3 1 1.000000 0.000000 132.603620 -117.544130 0.895656 1.000417 0.000823 0.990512
4 1 0.500000 0.000000 -0.000141 -95.070107 0.536487 0.992336 0.007779 0.982511
... ... ... ... ... ... ... ... ... ...
16721570 998 0.254233 0.654230 -89.967537 28.110500 35.537619 1.354696 0.277900 0.915335
16721571 998 0.255316 0.641760 -89.548157 28.170677 37.664772 1.345915 0.279300 0.909402
16721572 998 0.255497 0.628215 -89.451439 28.237469 39.968052 1.340451 0.279300 0.905710
16721573 998 0.254664 0.617541 -89.724800 28.299198 41.775429 1.393057 0.217900 0.941255
16721574 998 0.254648 0.605025 -89.672142 28.375393 43.917314 1.420526 0.182900 0.959815

16697652 rows × 9 columns

# Build the projection regressor and attach the DoE table plus the
# per-point samples; the two tables are joined on the 'doe_id' column.
reg = ProjectionPredictor()
reg.load_data(
    doe = doe,          # one row per experiment (process parameters)
    data = data,        # per-point samples, linked to `doe` via `index`
    index='doe_id',
    # Process parameters used as model inputs (German names from the DoE):
    process_parameters = [
        'Blechdicke',           # sheet thickness
        'Niederhalterkraft',    # blank-holder force
        'Ziehspalt',            # draw gap / die clearance
        'Einlegeposition',      # blank insertion position
        'Ziehtiefe',            # draw depth
        'Rp0',                  # yield strength
    ],
    # Parameters with a small discrete set of levels, treated as categories
    # (see data_summary output: Ziehspalt {1.6, 2.4}, Ziehtiefe {30, 50, 70}).
    categorical = [
        'Ziehspalt', 
        'Ziehtiefe',
    ],
    # Normalized surface coordinates used as positional inputs.
    position = ['u', 'v'],
    output = ['x', 'y', 'z'] ,# add 'thickness', 'epseqpl', 'thinning' here to predict those fields too
    validation_split=0.1,           # 10% of experiments held out
    validation_method='leaveoneout',
    position_scaler='minmax'        # scale (u, v) to [0, 1]
)
# Persist the data/configuration so the model can be reloaded later.
reg.save_config("../models/springback_uvmap_xyz.pkl")
reg.data_summary()
Data summary
------------------------------------------------------------

Process parameters:
    - Blechdicke : numerical [ 0.99  ...  1.48 ]
    - Niederhalterkraft : numerical [ 10  ...  500 ]
    - Ziehspalt : categorical [1.6, 2.4]
    - Einlegeposition : numerical [ -5  ...  5 ]
    - Ziehtiefe : categorical [30, 50, 70]
    - Rp0 : numerical [ 133.18263199999998  ...  296.5565 ]
Input variables:
    - u : numerical, [ 0.0 / 1.0 ] 
    - v : numerical, [ 0.0 / 1.0 ] 
Output variable(s):
    - x : numerical, [ -202.42731 / 202.42731 ]
    - y : numerical, [ -118.32256 / 96.918961 ]
    - z : numerical, [ 0.0 / 73.213165 ]

Inputs (16697652, 11)
Outputs (16697652, 3)
Total number of experiments: 725
Total number of samples: 16697652
Number of training samples: 15068936
Number of test samples: 1628716
Number of experiments in the test set: 72

# Network / training hyperparameters for the fully-connected regressor.
config = {
    'batch_size': 2048*16,      # large batches: ~15M training samples (see summary above)
    'max_epochs': 100,
    'layers': [256, 256, 256, 256, 256],    # five hidden layers, 256 units each
    'dropout': 0.0,
    'learning_rate': 0.001,
    'activation': 'lrelu'       # LeakyReLU activation
}

# Train with the fixed config and save the best model to disk.
reg.custom_model(save_path='../models/best_uv_xyz_model', config=config, verbose=True)
reg.training_summary()
Metal device set to: Apple M1 Pro

systemMemory: 16.00 GB
maxCacheSize: 5.33 GB

Model: "sequential"
_________________________________________________________________
 Layer (type)                Output Shape              Param #   
=================================================================
 dense (Dense)               (None, 256)               3328      

 leaky_re_lu (LeakyReLU)     (None, 256)               0         

 dense_1 (Dense)             (None, 256)               65792     

 leaky_re_lu_1 (LeakyReLU)   (None, 256)               0         

 dense_2 (Dense)             (None, 256)               65792     

 leaky_re_lu_2 (LeakyReLU)   (None, 256)               0         

 dense_3 (Dense)             (None, 256)               65792     

 leaky_re_lu_3 (LeakyReLU)   (None, 256)               0         

 dense_4 (Dense)             (None, 256)               65792     

 leaky_re_lu_4 (LeakyReLU)   (None, 256)               0         

 dense_5 (Dense)             (None, 3)                 771       

=================================================================
Total params: 267,267
Trainable params: 267,267
Non-trainable params: 0
_________________________________________________________________
Epoch 1/100
459/459 [==============================] - 19s 39ms/step - loss: 0.0065 - val_loss: 2.3933e-04
Epoch 2/100
459/459 [==============================] - 17s 38ms/step - loss: 2.1508e-04 - val_loss: 1.2952e-04
Epoch 3/100
459/459 [==============================] - 17s 38ms/step - loss: 1.5088e-04 - val_loss: 4.7396e-04
Epoch 4/100
459/459 [==============================] - 17s 37ms/step - loss: 1.4415e-04 - val_loss: 8.8372e-05
Epoch 5/100
459/459 [==============================] - 17s 37ms/step - loss: 1.2663e-04 - val_loss: 7.7546e-05
Epoch 6/100
459/459 [==============================] - 17s 37ms/step - loss: 1.1726e-04 - val_loss: 3.9939e-04
Epoch 7/100
459/459 [==============================] - 17s 37ms/step - loss: 1.0719e-04 - val_loss: 2.9736e-04
Epoch 8/100
459/459 [==============================] - 17s 37ms/step - loss: 9.6851e-05 - val_loss: 4.9412e-05
Epoch 9/100
459/459 [==============================] - 17s 37ms/step - loss: 9.6222e-05 - val_loss: 1.2518e-04
Epoch 10/100
459/459 [==============================] - 17s 37ms/step - loss: 8.8397e-05 - val_loss: 5.4822e-05
Epoch 11/100
459/459 [==============================] - 17s 37ms/step - loss: 8.1879e-05 - val_loss: 5.2642e-05
Epoch 12/100
459/459 [==============================] - 17s 37ms/step - loss: 8.0886e-05 - val_loss: 4.1507e-05
Epoch 13/100
459/459 [==============================] - 17s 37ms/step - loss: 7.3210e-05 - val_loss: 5.6658e-05
Epoch 14/100
459/459 [==============================] - 17s 37ms/step - loss: 6.9395e-05 - val_loss: 1.0389e-04
Epoch 15/100
459/459 [==============================] - 17s 37ms/step - loss: 6.8788e-05 - val_loss: 3.8324e-05
Epoch 16/100
459/459 [==============================] - 17s 37ms/step - loss: 6.2494e-05 - val_loss: 3.6982e-05
Epoch 17/100
459/459 [==============================] - 17s 37ms/step - loss: 6.2144e-05 - val_loss: 8.8460e-05
Epoch 18/100
459/459 [==============================] - 17s 37ms/step - loss: 5.6566e-05 - val_loss: 1.5696e-04
Epoch 19/100
459/459 [==============================] - 17s 37ms/step - loss: 5.4330e-05 - val_loss: 3.1707e-04
Epoch 20/100
459/459 [==============================] - 17s 37ms/step - loss: 5.3039e-05 - val_loss: 8.3380e-05
Epoch 21/100
459/459 [==============================] - 17s 37ms/step - loss: 5.0385e-05 - val_loss: 5.2483e-05
Epoch 22/100
459/459 [==============================] - 17s 37ms/step - loss: 4.8897e-05 - val_loss: 1.4480e-04
Epoch 23/100
459/459 [==============================] - 17s 37ms/step - loss: 4.7880e-05 - val_loss: 5.4595e-05
Epoch 24/100
459/459 [==============================] - 17s 37ms/step - loss: 4.5300e-05 - val_loss: 5.1525e-05
Epoch 25/100
459/459 [==============================] - 17s 37ms/step - loss: 4.3454e-05 - val_loss: 1.5018e-04
Epoch 26/100
459/459 [==============================] - 17s 37ms/step - loss: 3.9921e-05 - val_loss: 6.7408e-05
Epoch 27/100
459/459 [==============================] - 17s 38ms/step - loss: 4.1407e-05 - val_loss: 2.3157e-05
Epoch 28/100
459/459 [==============================] - 17s 38ms/step - loss: 3.8681e-05 - val_loss: 2.7831e-05
Epoch 29/100
459/459 [==============================] - 17s 37ms/step - loss: 3.8090e-05 - val_loss: 3.6708e-05
Epoch 30/100
459/459 [==============================] - 17s 38ms/step - loss: 3.8365e-05 - val_loss: 3.5930e-05
Epoch 31/100
459/459 [==============================] - 17s 38ms/step - loss: 3.5000e-05 - val_loss: 2.5402e-05
Epoch 32/100
459/459 [==============================] - 17s 37ms/step - loss: 3.3936e-05 - val_loss: 7.3974e-05
Epoch 33/100
459/459 [==============================] - 17s 37ms/step - loss: 3.4537e-05 - val_loss: 5.2942e-05
Epoch 34/100
459/459 [==============================] - 17s 38ms/step - loss: 3.2747e-05 - val_loss: 3.4904e-05
Epoch 35/100
459/459 [==============================] - 17s 38ms/step - loss: 3.0611e-05 - val_loss: 7.0845e-05
Epoch 36/100
459/459 [==============================] - 17s 37ms/step - loss: 3.1225e-05 - val_loss: 2.8233e-05
Epoch 37/100
459/459 [==============================] - 17s 37ms/step - loss: 3.0642e-05 - val_loss: 5.9285e-05
Epoch 38/100
459/459 [==============================] - 18s 38ms/step - loss: 3.0918e-05 - val_loss: 2.4387e-05
Epoch 39/100
459/459 [==============================] - 17s 38ms/step - loss: 2.6794e-05 - val_loss: 5.6879e-05
Epoch 40/100
459/459 [==============================] - 18s 38ms/step - loss: 2.6904e-05 - val_loss: 2.2034e-05
Epoch 41/100
459/459 [==============================] - 18s 39ms/step - loss: 2.8363e-05 - val_loss: 1.9228e-05
Epoch 42/100
459/459 [==============================] - 17s 37ms/step - loss: 2.5935e-05 - val_loss: 1.8365e-05
Epoch 43/100
459/459 [==============================] - 17s 37ms/step - loss: 2.5601e-05 - val_loss: 2.0320e-05
Epoch 44/100
459/459 [==============================] - 17s 38ms/step - loss: 2.4581e-05 - val_loss: 1.6876e-05
Epoch 45/100
459/459 [==============================] - 17s 37ms/step - loss: 2.4224e-05 - val_loss: 3.2437e-05
Epoch 46/100
459/459 [==============================] - 17s 37ms/step - loss: 2.4344e-05 - val_loss: 3.2575e-05
Epoch 47/100
459/459 [==============================] - 17s 37ms/step - loss: 2.3265e-05 - val_loss: 1.8261e-05
Epoch 48/100
459/459 [==============================] - 17s 37ms/step - loss: 2.3004e-05 - val_loss: 7.4272e-05
Epoch 49/100
459/459 [==============================] - 17s 37ms/step - loss: 2.1939e-05 - val_loss: 4.5706e-05
Epoch 50/100
459/459 [==============================] - 17s 37ms/step - loss: 2.2234e-05 - val_loss: 8.5515e-05
Epoch 51/100
459/459 [==============================] - 17s 37ms/step - loss: 2.2141e-05 - val_loss: 1.8728e-05
Epoch 52/100
459/459 [==============================] - 18s 38ms/step - loss: 2.0612e-05 - val_loss: 1.8351e-05
Epoch 53/100
459/459 [==============================] - 17s 38ms/step - loss: 2.0568e-05 - val_loss: 9.5710e-05
Epoch 54/100
459/459 [==============================] - 18s 39ms/step - loss: 2.1903e-05 - val_loss: 4.5377e-05
Epoch 55/100
459/459 [==============================] - 18s 39ms/step - loss: 1.8237e-05 - val_loss: 1.7892e-05
Epoch 56/100
459/459 [==============================] - 17s 38ms/step - loss: 2.0009e-05 - val_loss: 1.7274e-05
Epoch 57/100
459/459 [==============================] - 18s 38ms/step - loss: 1.9111e-05 - val_loss: 1.5208e-05
Epoch 58/100
459/459 [==============================] - 17s 38ms/step - loss: 1.9308e-05 - val_loss: 2.3892e-05
Epoch 59/100
459/459 [==============================] - 17s 38ms/step - loss: 1.7526e-05 - val_loss: 3.0991e-05
Epoch 60/100
459/459 [==============================] - 17s 38ms/step - loss: 1.8785e-05 - val_loss: 1.2934e-05
Epoch 61/100
459/459 [==============================] - 17s 38ms/step - loss: 1.7114e-05 - val_loss: 1.3040e-05
Epoch 62/100
459/459 [==============================] - 17s 38ms/step - loss: 1.8372e-05 - val_loss: 1.7121e-05
Epoch 63/100
459/459 [==============================] - 17s 37ms/step - loss: 1.8002e-05 - val_loss: 2.1928e-05
Epoch 64/100
459/459 [==============================] - 17s 37ms/step - loss: 1.6018e-05 - val_loss: 1.4474e-05
Epoch 65/100
459/459 [==============================] - 17s 37ms/step - loss: 1.6718e-05 - val_loss: 2.2557e-05
Epoch 66/100
459/459 [==============================] - 17s 37ms/step - loss: 1.6536e-05 - val_loss: 1.5127e-05
Epoch 67/100
459/459 [==============================] - 17s 37ms/step - loss: 1.6468e-05 - val_loss: 3.3876e-05
Epoch 68/100
459/459 [==============================] - 17s 37ms/step - loss: 1.5820e-05 - val_loss: 1.4745e-05
Epoch 69/100
459/459 [==============================] - 17s 37ms/step - loss: 1.5388e-05 - val_loss: 1.6929e-05
Epoch 70/100
459/459 [==============================] - 17s 37ms/step - loss: 1.5672e-05 - val_loss: 1.3091e-05
Epoch 71/100
459/459 [==============================] - 17s 37ms/step - loss: 1.5435e-05 - val_loss: 3.4419e-05
Epoch 72/100
459/459 [==============================] - 17s 37ms/step - loss: 1.4883e-05 - val_loss: 1.5086e-05
Epoch 73/100
459/459 [==============================] - 17s 37ms/step - loss: 1.5241e-05 - val_loss: 1.3874e-05
Epoch 74/100
459/459 [==============================] - 178s 388ms/step - loss: 1.4886e-05 - val_loss: 1.2446e-05
Epoch 75/100
459/459 [==============================] - 17s 38ms/step - loss: 1.4118e-05 - val_loss: 1.2806e-05
Epoch 76/100
459/459 [==============================] - 17s 38ms/step - loss: 1.4447e-05 - val_loss: 1.5676e-05
Epoch 77/100
459/459 [==============================] - 17s 37ms/step - loss: 1.3738e-05 - val_loss: 1.5677e-05
Epoch 78/100
459/459 [==============================] - 17s 37ms/step - loss: 1.4484e-05 - val_loss: 1.3360e-05
Epoch 79/100
459/459 [==============================] - 17s 37ms/step - loss: 1.2965e-05 - val_loss: 3.9755e-05
Epoch 80/100
459/459 [==============================] - 17s 37ms/step - loss: 1.3671e-05 - val_loss: 1.7337e-05
Epoch 81/100
459/459 [==============================] - 17s 38ms/step - loss: 1.3768e-05 - val_loss: 1.2291e-05
Epoch 82/100
459/459 [==============================] - 17s 38ms/step - loss: 1.3414e-05 - val_loss: 1.4601e-05
Epoch 83/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2840e-05 - val_loss: 1.2134e-05
Epoch 84/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2685e-05 - val_loss: 1.1668e-05
Epoch 85/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2562e-05 - val_loss: 3.7836e-05
Epoch 86/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2963e-05 - val_loss: 2.6492e-05
Epoch 87/100
459/459 [==============================] - 17s 38ms/step - loss: 1.3095e-05 - val_loss: 2.5658e-05
Epoch 88/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1757e-05 - val_loss: 1.1484e-05
Epoch 89/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2109e-05 - val_loss: 1.5168e-05
Epoch 90/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2602e-05 - val_loss: 1.4846e-05
Epoch 91/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1442e-05 - val_loss: 1.9128e-05
Epoch 92/100
459/459 [==============================] - 912s 2s/step - loss: 1.1339e-05 - val_loss: 1.2121e-05
Epoch 93/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2034e-05 - val_loss: 2.3578e-05
Epoch 94/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1260e-05 - val_loss: 2.6563e-05
Epoch 95/100
459/459 [==============================] - 17s 38ms/step - loss: 1.2027e-05 - val_loss: 1.1131e-05
Epoch 96/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1028e-05 - val_loss: 1.1175e-05
Epoch 97/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1167e-05 - val_loss: 1.1884e-05
Epoch 98/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1407e-05 - val_loss: 1.6705e-05
Epoch 99/100
459/459 [==============================] - 17s 38ms/step - loss: 1.0330e-05 - val_loss: 1.2747e-05
Epoch 100/100
459/459 [==============================] - 17s 38ms/step - loss: 1.1878e-05 - val_loss: 1.2171e-05
52/52 [==============================] - 1s 14ms/step - loss: 1.1131e-05
INFO:tensorflow:Assets written to: ../models/best_uv_xyz_model/assets
Validation mse: 1.1130633538414259e-05

# Reload the trained network from disk (e.g. when resuming in a fresh session).
reg.load_network('../models/best_uv_xyz_model')
Metal device set to: Apple M1 Pro

systemMemory: 16.00 GB
maxCacheSize: 5.33 GB


# Pick a random experiment ID and compare prediction vs. ground truth.
idx = np.random.choice(data['doe_id'].unique()) 
print("Doe_ID", idx)
# BUG FIX: this previously called reg.compare(440) with a hard-coded ID,
# ignoring the randomly drawn `idx` — the printed Doe_ID (e.g. 550 in the
# recorded output) did not match the experiment actually plotted.
reg.compare(idx)
Doe_ID 550

# Draw another random experiment ID and compare the predicted x/y/z fields
# against the simulated ones for that experiment.
unique_ids = data['doe_id'].unique()
idx = np.random.choice(unique_ids)
print("Doe_ID", idx)
reg.compare_xyz(idx)
Doe_ID 909

# Plotly for interactive 3D scatter plots rendered inside the notebook.
import plotly
import plotly.graph_objs as go
plotly.offline.init_notebook_mode()

def plotly_show_surface(x, y):
    """Render the predicted surface as an interactive 3D point cloud.

    Parameters
    ----------
    x : unused here; kept so the signature matches the (inputs, outputs)
        pair returned by ``reg.predict``.
    y : array whose leading axis holds the output channels — y[0], y[1]
        and y[2] are the predicted x/y/z coordinate grids.
    """
    # Tiny, slightly transparent markers so the dense cloud reads as a surface.
    marker_style = {
        'size': 0.2,
        'opacity': 0.8,
    }
    scatter = go.Scatter3d(
        x=y[0, :, :].flatten(),
        y=y[1, :, :].flatten(),
        z=y[2, :, :].flatten(),
        mode='markers',
        marker=marker_style,
    )
    # Zero margins: let the 3D scene fill the whole output cell.
    figure = go.Figure(
        data=[scatter],
        layout=go.Layout(margin={'l': 0, 'r': 0, 'b': 0, 't': 0}),
    )
    plotly.offline.iplot(figure)


# Notebook magic: render matplotlib figures inline; re-apply the DPI setting.
%matplotlib inline
plt.rcParams['figure.dpi'] = 150

def mpl_show_surface(x, y):
    """Plot the predicted surface as a static 3D matplotlib scatter.

    Parameters
    ----------
    x : unused; kept so the signature matches the callback interface
        expected by ``reg.interactive`` (inputs, outputs).
    y : array whose leading axis holds the output channels — y[0], y[1]
        and y[2] are the predicted x/y/z coordinate grids.
    """
    fig = plt.figure()
    ax = fig.add_subplot(111, projection='3d')
    # Fix: the scatter return value was bound to an unused local `p`;
    # it is now simply discarded.
    ax.scatter(
        y[0, :, :].flatten(),
        y[1, :, :].flatten(),
        y[2, :, :].flatten(),
        s=0.001
    )
# Predict a full 500x500 (u, v) surface grid for one concrete parameter set.
# NOTE(review): 'Stempel_ID', 'E' and 'Rp50' were not declared as process
# parameters in load_data above — presumably ignored by the predictor;
# confirm against ProjectionPredictor.predict.
x, y = reg.predict({
        'Blechdicke': 1.01, 
        'Niederhalterkraft': 410.0, 
        'Ziehspalt': 2.4, 
        'Einlegeposition': -5, 
        'Ziehtiefe': 30,
        'Stempel_ID': 3,
        'E': 191.37245,
        'Rp0': 138.22696,
        'Rp50': 449.528189,
    }, 
    shape=(500, 500))

# Interactive plotly view of the fixed prediction above.
plotly_show_surface(x, y)
# Widget-driven exploration: sliders for each process parameter,
# re-rendered through the matplotlib callback.
reg.interactive(mpl_show_surface, positions=(500, 500))
interactive(children=(FloatSlider(value=1.1883700000000001, description='Blechdicke', max=1.48, min=0.99, step…